The following functions will be used with TensorFlow to help preprocess the data. They allow you to build the feature vector for a neural network.
from collections.abc import Sequence
from sklearn import preprocessing
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import shutil
import os
# Encode text values to dummy variables(i.e. [1,0,0],[0,1,0],[0,0,1] for red,green,blue)
def encode_text_dummy(df, name):
    """One-hot encode column `name` in place; new columns are named '<name>-<value>'.

    The original column is dropped after the indicator columns are added.
    """
    indicators = pd.get_dummies(df[name])
    for value in indicators.columns:
        df["{}-{}".format(name, value)] = indicators[value]
    df.drop(name, axis=1, inplace=True)
# Encode text values to a single dummy variable. The new columns (which do not replace the old) will have a 1
# at every location where the original column (name) matches each of the target_values. One column is added for
# each target value.
def encode_text_single_dummy(df, name, target_values):
    """Add one 0/1 indicator column per target value.

    Each new column '<name>-<tv>' holds 1 where the original column equals
    that target value. The original column is kept.
    """
    for tv in target_values:
        values = df[name].astype(str)
        flags = [1 if str(v) == str(tv) else 0 for v in list(values)]
        df["{}-{}".format(name, tv)] = flags
# Encode text values to indexes(i.e. [1],[2],[3] for red,green,blue).
def encode_text_index(df, name):
    """Replace column `name` in place with integer label indexes.

    Returns the array of class labels, indexed by the encoded value.
    """
    encoder = preprocessing.LabelEncoder()
    df[name] = encoder.fit_transform(df[name])
    return encoder.classes_
# Encode a numeric column as zscores
def encode_numeric_zscore(df, name, mean=None, sd=None):
    """Standardize column `name` in place as z-scores.

    `mean` and `sd` default to the column's own statistics when not supplied.
    """
    center = df[name].mean() if mean is None else mean
    spread = df[name].std() if sd is None else sd
    df[name] = (df[name] - center) / spread
# Convert all missing values in the specified column to the median
def missing_median(df, name):
    """Fill NA values in column `name` with that column's median, in place."""
    df[name] = df[name].fillna(df[name].median())
# Convert all missing values in the specified column to the default
def missing_default(df, name, default_value):
    """Fill NA values in column `name` with `default_value`, in place."""
    filled = df[name].fillna(default_value)
    df[name] = filled
# Convert a Pandas dataframe to the x,y inputs that TensorFlow needs
def to_xy(df, target):
    """Convert a DataFrame into the (x, y) numpy arrays TensorFlow needs.

    Integer targets (int32/int64) are treated as classification and one-hot
    encoded; any other dtype is treated as regression. Both arrays are
    returned as float32, which TensorFlow prefers.

    Returns:
        (x, y): x is the float32 matrix of all non-target columns;
        y is the one-hot matrix (classification) or raw values (regression).
    """
    # All columns except the target become predictors (was a manual append loop).
    feature_cols = [col for col in df.columns if col != target]
    x = df[feature_cols].values.astype(np.float32)
    # Find out the type of the target column.
    target_type = df[target].dtypes
    # Guard against a sequence of dtypes being returned — take the first.
    target_type = target_type[0] if isinstance(target_type, Sequence) else target_type
    if target_type in (np.int64, np.int32):
        # Classification: one-hot encode the target.
        return x, pd.get_dummies(df[target]).values.astype(np.float32)
    # Regression: return the raw target values.
    return x, df[target].values.astype(np.float32)
# Nicely formatted time string
def hms_string(sec_elapsed):
    """Format an elapsed time in seconds as 'H:MM:SS.ss'."""
    hours, remainder = divmod(sec_elapsed, 60 * 60)
    minutes = int(remainder // 60)
    seconds = sec_elapsed % 60
    return "{}:{:>02}:{:>05.2f}".format(int(hours), minutes, seconds)
# Regression chart.
def chart_regression(pred, y, sort=True):
    """Plot predicted vs. expected values for a regression model.

    When `sort` is True the points are ordered by the expected value,
    which makes the two curves easier to compare visually.
    """
    frame = pd.DataFrame({'pred': pred, 'y': y.flatten()})
    if sort:
        frame.sort_values(by=['y'], inplace=True)
    plt.plot(frame['y'].tolist(), label='expected')
    plt.plot(frame['pred'].tolist(), label='prediction')
    plt.ylabel('output')
    plt.legend()
    plt.show()
# Remove all rows where the specified column is +/- sd standard deviations
def remove_outliers(df, name, sd):
    """Drop rows, in place, where column `name` lies at least `sd` standard
    deviations away from the column mean."""
    col = df[name]
    is_outlier = np.abs(col - col.mean()) >= (sd * col.std())
    df.drop(df.index[is_outlier], axis=0, inplace=True)
# Encode a column to a range between normalized_low and normalized_high.
def encode_numeric_range(df, name, normalized_low=-1, normalized_high=1,
                         data_low=None, data_high=None):
    """Rescale column `name` in place to [normalized_low, normalized_high].

    `data_low`/`data_high` default to the column's min/max.

    Bug fix: each bound is now defaulted independently. Previously only
    `data_low is None` was tested, so supplying `data_low` while leaving
    `data_high=None` left the high bound as None and raised a TypeError.
    """
    if data_low is None:
        data_low = min(df[name])
    if data_high is None:
        data_high = max(df[name])
    df[name] = ((df[name] - data_low) / (data_high - data_low)) \
        * (normalized_high - normalized_low) + normalized_low
import tensorflow as tf
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation
from tensorflow.keras.callbacks import EarlyStopping
from sklearn import metrics
# Training inputs: 1000 sorted angles (degrees) sampled uniformly from [0, 360).
x = np.sort((360 * np.random.rand(1000, 1)), axis=0)
# Two regression targets per angle: pi*sin(theta) and pi*cos(theta), converting degrees to radians.
y = np.array([np.pi * np.sin(x*(np.pi/180.0)).ravel(), np.pi * np.cos(x*(np.pi/180.0)).ravel()]).T
# Bare expression: notebook cell output displaying the generated inputs.
x
9.63873034],
[ 50.25974811],
[ 50.28574895],
[ 50.32706459],
[ 51.66199246],
[ 51.73722194],
[ 51.87292149],
[ 51.87596194],
[ 51.98420994],
[ 52.15178039],
[ 52.8492042 ],
[ 52.99591376],
[ 53.37555052],
[ 53.38727203],
[ 53.49703661],
[ 53.68111825],
[ 53.69408489],
[ 53.81530815],
[ 54.95408255],
[ 55.30612918],
[ 55.92329744],
[ 56.08343846],
[ 57.47555695],
[ 59.70298405],
[ 60.25417404],
[ 61.19688285],
[ 61.84269079],
[ 61.97610815],
[ 62.38609706],
[ 62.51766836],
[ 62.89461027],
[ 63.19352189],
[ 64.38172524],
[ 65.05620975],
[ 65.4814725 ],
[ 65.50387165],
[ 65.90090294],
[ 66.09228278],
[ 66.2177781 ],
[ 66.31847394],
[ 66.32167873],
[ 66.44677337],
[ 66.63774835],
[ 66.88133346],
[ 67.39588642],
[ 67.58275403],
[ 67.72020861],
[ 68.28515808],
[ 68.30233854],
[ 68.38614747],
[ 68.43667696],
[ 68.74352297],
[ 68.77996023],
[ 68.88022117],
[ 68.89391299],
[ 70.01931111],
[ 70.02176768],
[ 70.2598985 ],
[ 70.50766306],
[ 70.79113727],
[ 70.90242229],
[ 70.91328243],
[ 70.95925428],
[ 71.12349684],
[ 71.20879319],
[ 72.63048971],
[ 72.6605347 ],
[ 72.67951098],
[ 73.1670121 ],
[ 73.93459572],
[ 74.68797403],
[ 75.15076704],
[ 75.62177294],
[ 75.78126218],
[ 76.02240497],
[ 76.2931359 ],
[ 76.68111663],
[ 76.69975741],
[ 77.91271837],
[ 78.60756068],
[ 78.68325539],
[ 79.27373335],
[ 79.72499458],
[ 80.04159113],
[ 80.27862571],
[ 80.31912857],
[ 80.57743679],
[ 81.01099356],
[ 81.69898918],
[ 82.18963334],
[ 82.61157968],
[ 82.62137203],
[ 83.20267 ],
[ 83.75457182],
[ 84.03541798],
[ 84.2579935 ],
[ 84.77008976],
[ 85.23273703],
[ 85.7005494 ],
[ 85.83140282],
[ 86.58849308],
[ 86.93975929],
[ 88.15295988],
[ 88.35576196],
[ 88.72050692],
[ 88.82955031],
[ 89.71432873],
[ 91.13385481],
[ 91.15924197],
[ 91.2743815 ],
[ 91.38479947],
[ 91.52546646],
[ 91.75450962],
[ 91.95951655],
[ 92.29188872],
[ 92.35825143],
[ 92.36153629],
[ 92.5160616 ],
[ 92.75588017],
[ 92.86105577],
[ 93.12188778],
[ 93.21835117],
[ 93.22740053],
[ 93.35329499],
[ 93.67609071],
[ 93.95230229],
[ 94.07864623],
[ 94.24467247],
[ 94.42689692],
[ 95.5004519 ],
[ 96.06580492],
[ 96.49804493],
[ 96.58323879],
[ 97.11033702],
[ 97.91739932],
[ 98.39108059],
[ 98.82349985],
[ 98.87935649],
[ 99.0934767 ],
[ 99.51992208],
[ 99.55627135],
[ 99.65335168],
[ 99.78668483],
[100.42649434],
[101.84811272],
[101.97915784],
[101.98502398],
[102.27847224],
[102.61309505],
[102.68705176],
[102.9844614 ],
[102.98491525],
[103.02649463],
[103.37239154],
[103.53464156],
[104.38046022],
[104.60201244],
[104.76520427],
[104.81146783],
[104.99599453],
[105.18089626],
[105.31726599],
[105.39000795],
[105.94372347],
[106.66495472],
[106.68329541],
[106.79911776],
[107.30234039],
[107.45860991],
[107.6478686 ],
[108.07018714],
[108.40872157],
[108.441573 ],
[108.75975302],
[109.05518058],
[109.78636644],
[110.12090076],
[110.49689393],
[111.31901981],
[111.35730778],
[111.36289334],
[112.3192176 ],
[112.68806035],
[112.95323133],
[114.64095752],
[114.6912684 ],
[114.73802734],
[115.06080057],
[115.65110097],
[115.67305381],
[115.97146912],
[115.99133275],
[116.23982315],
[116.55236249],
[116.68940641],
[117.107279 ],
[117.86516751],
[118.18286705],
[118.46379767],
[118.64380665],
[119.16143202],
[119.30370151],
[119.93422966],
[120.20173599],
[120.29711874],
[120.76386136],
[121.02319658],
[121.75830821],
[121.97040833],
[122.0697525 ],
[122.75817872],
[122.85197975],
[122.90183268],
[123.33381924],
[123.59761885],
[124.11913124],
[124.15295685],
[124.55646067],
[124.95270009],
[125.30577126],
[125.62423748],
[125.84244607],
[126.07361885],
[126.91479866],
[127.15559787],
[127.48704659],
[128.0056827 ],
[129.53629466],
[129.72117939],
[129.80678765],
[130.10469754],
[130.55090938],
[130.60895381],
[131.79430486],
[131.89388755],
[132.69978238],
[132.73064661],
[132.87131108],
[133.09132218],
[133.68293301],
[133.78221015],
[133.9869397 ],
[134.07755223],
[134.13221332],
[134.13605612],
[134.22506466],
[135.09909855],
[135.39771363],
[135.52712236],
[136.01876011],
[136.03323459],
[136.14308075],
[136.18124569],
[136.32563625],
[137.25250446],
[138.28716149],
[138.63080233],
[139.4999943 ],
[139.5750677 ],
[140.43268751],
[140.58936191],
[141.04661431],
[141.04874333],
[141.06893295],
[141.43353301],
[141.84871321],
[141.84912845],
[142.15730251],
[142.20056324],
[142.5628618 ],
[142.5850761 ],
[143.00587758],
[143.68808471],
[144.36263352],
[144.61971855],
[144.68479279],
[144.6898849 ],
[144.82319746],
[144.92453745],
[145.08947049],
[145.61540366],
[145.74335729],
[145.90070468],
[146.35128927],
[147.57347261],
[148.42930197],
[148.59156391],
[148.70432667],
[149.06614148],
[149.58287594],
[149.92167878],
[150.3353651 ],
[150.93796541],
[151.35355566],
[151.58611709],
[151.68625362],
[152.37069831],
[152.72548791],
[153.66638567],
[154.22965623],
[154.23749635],
[154.31251621],
[154.90575203],
[155.04081583],
[155.77015192],
[156.63371581],
[156.91158444],
[157.44747548],
[158.25198048],
[158.51474959],
[158.61725302],
[158.76038116],
[158.77976744],
[159.04941324],
[159.39270209],
[159.81137968],
[160.08242783],
[160.3995511 ],
[161.20467989],
[161.23927603],
[161.9204699 ],
[162.35310955],
[162.46552668],
[162.53307276],
[162.56219507],
[162.76252397],
[162.84730433],
[163.22588454],
[163.60366483],
[163.63159774],
[163.74543776],
[163.784291 ],
[163.99851198],
[164.05414573],
[164.51608208],
[164.59063752],
[165.483714 ],
[165.76500204],
[165.9518132 ],
[166.29650699],
[166.45145453],
[166.52868572],
[166.57668298],
[167.10624703],
[167.25220674],
[167.72218629],
[167.85383646],
[167.93637807],
[168.09175781],
[168.30511833],
[168.64407056],
[169.36747785],
[169.80684385],
[169.80855956],
[169.86334222],
[170.10373703],
[170.79516716],
[171.1530904 ],
[171.17670614],
[171.23207911],
[171.29646911],
[172.11867145],
[172.71439879],
[172.77725911],
[172.89809265],
[173.93702759],
[174.45492693],
[174.57083381],
[175.00898343],
[175.14055722],
[175.25041543],
[175.44942524],
[175.61125385],
[175.77785402],
[175.81889065],
[176.01086844],
[176.19268158],
[176.37457341],
[178.48638729],
[178.8439421 ],
[180.03716817],
[180.03939114],
[180.28237207],
[180.49044524],
[180.95671294],
[181.00433316],
[181.55463863],
[182.01080565],
[182.22553499],
[182.47052837],
[182.54528634],
[182.84908176],
[183.46542271],
[183.61452605],
[183.94438857],
[184.52012244],
[184.99039152],
[185.29200757],
[185.60132558],
[185.7931864 ],
[186.4145458 ],
[186.59754344],
[187.43026045],
[187.61976959],
[188.38555038],
[190.57675434],
[191.00257551],
[191.04546094],
[191.64922022],
[191.68432517],
[192.45949848],
[192.66190591],
[192.89891343],
[192.94564845],
[193.80525677],
[194.04937089],
[194.09127654],
[194.5767056 ],
[194.82583295],
[195.09691882],
[195.42909812],
[195.75736904],
[196.56105775],
[196.84405831],
[197.1529273 ],
[197.38846915],
[198.06455207],
[198.10195367],
[198.18655706],
[198.18797967],
[198.38857846],
[198.45203294],
[198.82544328],
[198.98794519],
[199.23249249],
[199.76917616],
[199.94523058],
[200.05374238],
[200.2890476 ],
[200.40293049],
[200.4505971 ],
[200.58751671],
[200.8618923 ],
[201.07388735],
[201.47493349],
[201.69632925],
[201.89387478],
[201.98924322],
[202.37661359],
[202.4118709 ],
[202.66276547],
[202.71011764],
[203.2950025 ],
[203.35022291],
[203.48755808],
[203.50237226],
[203.87701463],
[203.90384619],
[204.08762726],
[204.2346828 ],
[204.54074203],
[204.87660299],
[205.54985717],
[205.96852635],
[206.06156402],
[206.65878031],
[206.69042655],
[207.32530667],
[207.46228518],
[208.47877481],
[208.81119949],
[209.23841436],
[209.44316895],
[209.5995774 ],
[210.75556874],
[211.35495964],
[211.43366842],
[212.05566758],
[212.34257735],
[212.51776514],
[212.70274065],
[212.78544052],
[212.94472137],
[213.01285659],
[213.78462514],
[214.30299488],
[214.37463978],
[214.63419097],
[214.82050592],
[215.87333767],
[216.28051444],
[216.53520988],
[216.97537508],
[217.36811958],
[217.71837314],
[217.94058283],
[218.28857727],
[218.48386434],
[218.51831885],
[218.67058111],
[218.80031259],
[218.92395032],
[218.92443453],
[220.64494865],
[220.67099799],
[220.81392335],
[221.70743415],
[221.82215994],
[221.89233658],
[222.21079976],
[223.9206135 ],
[224.63242675],
[224.85209828],
[225.20381797],
[225.60380727],
[225.61573665],
[225.62520259],
[226.00083586],
[226.8106423 ],
[227.21382786],
[227.52025636],
[228.19787291],
[228.53617448],
[228.60361001],
[228.81233533],
[228.96068429],
[229.40497882],
[229.4083274 ],
[229.77907189],
[230.33375755],
[230.97847298],
[231.06426031],
[231.42563348],
[231.86893702],
[231.94391195],
[231.99819616],
[232.09527321],
[232.43910032],
[232.54279774],
[233.3326886 ],
[233.86487351],
[234.07195373],
[234.38105981],
[234.48400277],
[234.77947225],
[234.82173163],
[234.89572885],
[235.07668822],
[235.13718721],
[235.31773735],
[235.6957474 ],
[236.42353581],
[236.454847 ],
[236.48547836],
[236.79752734],
[237.20511771],
[237.48500125],
[237.61046096],
[237.9654179 ],
[238.81223535],
[238.9059995 ],
[239.59949146],
[240.09152849],
[241.47262632],
[241.77142915],
[242.07274558],
[242.24534016],
[242.86617269],
[242.88777942],
[242.9416704 ],
[243.14592834],
[243.62840154],
[243.72119023],
[244.55783409],
[244.59961874],
[245.50516205],
[245.82911244],
[245.863614 ],
[246.41428989],
[246.58341091],
[246.99280755],
[247.1081976 ],
[247.20815135],
[247.9249025 ],
[248.53312784],
[248.5727899 ],
[248.5928544 ],
[248.98319725],
[250.66510523],
[250.76065508],
[251.07707024],
[251.54879464],
[252.29126305],
[252.58398933],
[252.83770722],
[253.11952533],
[253.66843653],
[253.80910892],
[254.29423064],
[254.53871116],
[254.5907138 ],
[255.91526568],
[256.26469518],
[256.38388551],
[257.93209749],
[257.93291008],
[258.17175691],
[258.28684502],
[258.38759682],
[258.6610299 ],
[258.69311435],
[258.95219237],
[259.01831031],
[259.04665261],
[259.32569801],
[259.54401564],
[259.62776428],
[260.37353551],
[260.96084198],
[262.37045465],
[262.42671258],
[262.60282839],
[262.87046332],
[262.90789405],
[262.94560743],
[263.66733258],
[264.36116281],
[265.73053071],
[266.20068844],
[267.19051542],
[267.67406338],
[268.10499622],
[268.3448472 ],
[268.49242654],
[268.50615329],
[268.72678217],
[269.8110038 ],
[270.32637414],
[270.49024379],
[271.16377825],
[271.19356485],
[271.62443606],
[272.25723649],
[273.06804774],
[273.35291371],
[273.37353304],
[273.42245327],
[273.59794491],
[273.93156681],
[274.30409047],
[274.43621417],
[274.53570566],
[274.58494771],
[274.64813397],
[274.70927091],
[275.15606862],
[277.67772356],
[277.8598481 ],
[278.97296282],
[279.15480989],
[279.21932388],
[279.51926622],
[279.57114819],
[279.94133747],
[280.09222998],
[280.32536146],
[280.51546121],
[280.62263924],
[281.60152862],
[281.70138053],
[281.75474822],
[282.45070875],
[283.1144963 ],
[283.31426485],
[283.78945506],
[283.80367727],
[284.06002579],
[284.2299819 ],
[284.36855927],
[284.63322606],
[284.75058943],
[285.88021694],
[285.9482047 ],
[287.23519336],
[287.3396529 ],
[287.69154939],
[287.87446391],
[288.15051418],
[288.41942545],
[288.57100098],
[288.59611792],
[288.63134999],
[289.32383013],
[289.70991887],
[289.84310532],
[289.91887978],
[290.92164315],
[291.34597144],
[291.50550675],
[292.07215696],
[292.46197077],
[292.57665018],
[292.67369193],
[292.8404521 ],
[293.40321652],
[293.40663812],
[293.59328167],
[294.28675833],
[294.39994778],
[295.30903204],
[295.5513396 ],
[296.14038946],
[296.43846561],
[296.52566509],
[297.99334728],
[298.63622866],
[298.76418531],
[299.13548982],
[299.47506813],
[299.66212697],
[299.84777632],
[299.86961712],
[300.06373155],
[300.13450967],
[301.08616997],
[301.1260479 ],
[301.4578325 ],
[302.3910357 ],
[302.39443041],
[302.55422874],
[302.8847596 ],
[303.42957322],
[303.74140655],
[303.76475681],
[304.25043489],
[304.4817361 ],
[305.01610391],
[305.03678039],
[305.06421452],
[305.48640798],
[305.48682668],
[306.58670271],
[306.68978915],
[307.04912465],
[307.11062719],
[307.24907119],
[307.30894084],
[307.89403941],
[309.15066697],
[309.22520261],
[309.80065015],
[310.26460509],
[310.39751844],
[311.14593733],
[311.2846571 ],
[311.73313685],
[311.78580401],
[312.04421147],
[312.1098471 ],
[312.13841865],
[312.33323749],
[312.33513341],
[312.47600644],
[313.13927163],
[313.54099279],
[313.57280703],
[314.14907337],
[314.63270776],
[315.33192581],
[316.34375455],
[317.16835241],
[317.32209539],
[317.67726048],
[317.68198448],
[318.33823149],
[318.74036268],
[318.76746183],
[319.0675438 ],
[319.36370643],
[319.76741557],
[320.45294591],
[320.59068804],
[320.60706952],
[320.72379074],
[320.87590966],
[321.74052911],
[321.86763391],
[322.17498318],
[323.60696452],
[324.0604327 ],
[325.14171154],
[325.40591347],
[325.73999908],
[326.14584495],
[326.68790883],
[326.98746554],
[327.44982919],
[328.08383149],
[328.08572745],
[328.66707752],
[328.91230466],
[329.39078065],
[329.57325067],
[330.0177761 ],
[330.05059045],
[330.193854 ],
[330.58827769],
[332.09138528],
[332.56837318],
[332.62119224],
[333.00358992],
[333.45211313],
[334.27033211],
[334.67279889],
[334.89162215],
[335.12312087],
[335.30001419],
[335.40549257],
[336.04876397],
[336.17183887],
[336.27978172],
[337.24158094],
[337.47595956],
[338.41500487],
[338.74355918],
[338.79507271],
[339.52179408],
[340.21409799],
[340.53798715],
[340.77186626],
[340.99166266],
[341.34759807],
[341.57083382],
[343.03157605],
[343.51469599],
[343.73957365],
[344.15437567],
[344.19949331],
[344.41803092],
[345.5892922 ],
[345.91305032],
[346.03914533],
[346.48685225],
[347.15846421],
[349.19338656],
[349.61341554],
[349.63366191],
[349.70873606],
[349.77261743],
[349.79378243],
[349.96922208],
[350.55077632],
[351.39597773],
[351.43924351],
[351.79027929],
[352.03802278],
[352.86562749],
[352.96733704],
[353.82164063],
[355.188492 ],
[355.74541733],
[355.81074215],
[355.90614399],
[356.97902386],
[357.39294128],
[357.42162727],
[357.7123942 ],
[357.94168042],
[358.57249765],
[358.95993897]])
y
array([[ 0.02147938, 3.14151922],
[ 0.05034107, 3.14118929],
[ 0.06780077, 3.14086094],
...,
[-0.11283573, 3.13956565],
[-0.07826348, 3.14061765],
[-0.0570246 , 3.14107507]])
# Build a small regression DNN: three ReLU hidden layers (100, 50, 25 units)
# and a 2-unit linear output (one per target: pi*sin and pi*cos).
model = Sequential()
model.add(Dense(100, input_dim=x.shape[1], activation='relu'))
model.add(Dense(50, activation='relu'))
model.add(Dense(25, activation='relu'))
model.add(Dense(2)) # Two output neurons
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(x,y,verbose=0,batch_size=32,epochs=100)
model.summary()
# Fit regression DNN model.
pred = model.predict(x)
Model: "sequential_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_8 (Dense) (None, 100) 200 _________________________________________________________________ dense_9 (Dense) (None, 50) 5050 _________________________________________________________________ dense_10 (Dense) (None, 25) 1275 _________________________________________________________________ dense_11 (Dense) (None, 2) 52 ================================================================= Total params: 6,577 Trainable params: 6,577 Non-trainable params: 0 _________________________________________________________________
score = np.sqrt(metrics.mean_squared_error(pred, y))
print("Score (RMSE): {}".format(score))
print()
print()
print("Predicted:")
print(np.array(pred[0:5]))
print("Expected:")
print(np.array(y[0:5]))
Score (RMSE): 0.6567297509717616 Predicted: [[0.0301346 3.0568657 ] [0.03190772 3.1213605 ] [0.0334944 3.1502976 ] [0.03365009 3.1514628 ] [0.06789796 3.195845 ]] Expected: [[0.02147938 3.14151922] [0.05034107 3.14118929] [0.06780077 3.14086094] [0.06866198 3.14084223] [0.10495698 3.13983892]]
The hidden layers of the neural network have fewer neurons than the input/output layers. Because there are fewer neurons, the auto-encoder must learn to compress the input into this smaller hidden representation. The predictors (x) and output (y) are exactly the same in an auto-encoder; because of this, auto-encoders are said to be unsupervised.

https://pillow.readthedocs.io/en/stable/index.html
# Download a sample photo over HTTP and load it with Pillow.
from PIL import Image, ImageFile
from matplotlib.pyplot import imshow
import requests
from io import BytesIO
%matplotlib inline
url = "https://www.csus.edu/news/files/1_Campus_Spring_Flowers_Students_FB_20150330_0011.jpg"
response = requests.get(url)
img = Image.open(BytesIO(response.content))
img.load()
# The decoded image becomes a (rows, cols, 3) RGB numpy array.
print(np.asarray(img))
print('shape: ', np.asarray(img).shape)
# Bare expression: notebook cell output displaying the image.
img
[[[ 55 59 0] [ 73 77 18] [ 54 60 0] ... [ 72 78 4] [ 75 81 9] [ 55 61 0]] [[102 104 41] [ 53 58 0] [ 66 71 4] ... [ 63 70 0] [ 82 88 16] [ 96 102 32]] [[129 130 62] [ 75 76 6] [ 83 87 13] ... [ 95 102 24] [123 129 55] [ 62 68 0]] ... [[ 71 53 31] [ 88 70 48] [ 55 38 18] ... [ 84 81 28] [165 162 109] [155 152 101]] [[100 79 62] [ 89 68 51] [ 95 76 59] ... [124 119 64] [134 129 74] [153 147 95]] [[154 131 117] [142 120 106] [150 128 115] ... [115 110 54] [106 101 45] [112 107 51]]] shape: (682, 1024, 3)
Pillow can also be used to create an image from a 3D numpy cube. The rows and columns specify the pixels. The depth, of 3, specifies red, green and blue. Here a simple image is created.
from PIL import Image
import numpy as np

# Build a 256x256 RGB image from a numpy cube: one solid color per 128x128 quadrant.
w, h = 256, 256
data = np.zeros((h, w, 3), dtype=np.uint8)
# Yellow (top-left quadrant)
data[:128, :128] = [255, 255, 0]
# Red (bottom-left quadrant)
data[128:, :128] = [255, 0, 0]
# Green (bottom-right quadrant)
data[128:, 128:] = [0, 255, 0]
# Blue (top-right quadrant)
data[:128, 128:] = [0, 0, 255]
img = Image.fromarray(data, 'RGB')
img
We can combine the last two programs and modify images. Here we take the mean color of each pixel and form a grayscale image.
# Fetch a photo and convert it to grayscale by averaging each pixel's RGB channels.
from PIL import Image, ImageFile
from matplotlib.pyplot import imshow
import requests
from io import BytesIO
%matplotlib inline
url = "https://www.csus.edu/news/files/1_Campus_Spring_Flowers_Students_FB_20150330_0011.jpg"
response = requests.get(url)
img = Image.open(BytesIO(response.content))
img.load()
img_array = np.asarray(img)
rows = img_array.shape[0]
cols = img_array.shape[1]
print("Rows: {}, Cols: {}".format(rows,cols))
# Create new image
img2_array = np.zeros((rows, cols, 3), dtype=np.uint8)
# The mean of the three color channels becomes the gray level, written to all three channels.
for row in range(rows):
for col in range(cols):
t = np.mean(img_array[row,col])
img2_array[row,col] = [t,t,t]
img2 = Image.fromarray(img2_array, 'RGB')
# Bare expression: notebook cell output displaying the grayscale image.
img2
Rows: 682, Cols: 1024
from PIL import Image, ImageFile
from matplotlib.pyplot import imshow
import requests
from io import BytesIO
%matplotlib inline
def add_noise(a, num_spots=100, spot_fraction=20):
    """Return a copy of image array `a` with random square spots blanked to 0.

    Generalized from the original: the number of spots (was hard-coded to 100)
    and the spot size divisor (spot side = smallest dimension / spot_fraction,
    was hard-coded to 20) are now parameters with the original values as
    defaults, so existing callers are unaffected. The input array is not
    modified.

    Args:
        a: image array of shape (rows, cols, ...).
        num_spots: how many square spots to blank out.
        spot_fraction: spot side is 1/spot_fraction of the smallest dimension.

    Returns:
        A noisy copy of `a`.
    """
    noisy = a.copy()
    rows = noisy.shape[0]
    cols = noisy.shape[1]
    s = int(min(rows, cols) / spot_fraction)  # side length of each square spot
    for _ in range(num_spots):
        # Choose a top-left corner so the whole spot fits inside the image.
        x = np.random.randint(cols - s)
        y = np.random.randint(rows - s)
        noisy[y:(y + s), x:(x + s)] = 0
    return noisy
# Demonstrate add_noise on a downloaded photo.
url = "https://www.csus.edu/news/files/1_Campus_Spring_Flowers_Students_FB_20150330_0011.jpg"
response = requests.get(url)
img = Image.open(BytesIO(response.content))
img.load()
img_array = np.asarray(img)
rows = img_array.shape[0]
cols = img_array.shape[1]
print("Rows: {}, Cols: {}".format(rows,cols))
# Create new image by adding noise
img_array_noise = add_noise(img_array)
# make sure it is in uint8
img_array_noise= img_array_noise.astype(np.uint8)
print(img_array_noise.shape)
# Pillow expects uint8 RGB data here.
img2 = Image.fromarray(img_array_noise, 'RGB')
# Bare expression: notebook cell output displaying the noisy image.
img2
Rows: 682, Cols: 1024 (682, 1024, 3)
A denoising auto encoder is designed to remove noise from input signals.
To do this the $y$ becomes each image while the $x$ becomes a version of $y$ with noise added.
https://pillow.readthedocs.io/en/3.1.x/reference/Image.html
We create 10 noisy versions of each image. The network is trained to convert noisy data ($x$) to the original input ($y$).
%matplotlib inline
from PIL import Image, ImageFile
from matplotlib.pyplot import imshow
import requests
import numpy as np
from io import BytesIO
from IPython.display import display, HTML
# Source images for the denoising auto-encoder training set.
images = [
"https://www.planetware.com/photos-large/USCA/california-san-francisco-golden-gate-bridge.jpg",
"https://upload.wikimedia.org/wikipedia/commons/8/87/Golden_Gate_Bridge_Aerial.jpg",
"https://www.travelbook.de/data/uploads/2017/09/gettyimages-585577624_1504953116-1040x690.jpg",
"https://avatars.mds.yandex.net/get-pdb/27625/ea43da96-acce-4910-860b-27f8b3612dd9/orig",
"https://www.visittheusa.com/sites/default/files/styles/hero_m_1300x700/public/images/hero_media_image/2017-05/23b0b0b9caaa07ee409b693da9bf9003.jpeg?itok=QUmOkiy_"
]
# x collects flattened noisy images; y collects the matching clean images
# (10 noisy copies are generated per source image).
x = []
y = []
loaded_images = []
for url in images:
response = requests.get(url)
img = Image.open(BytesIO(response.content))
img.load()
img = img.resize((256,256), Image.LANCZOS)
loaded_images.append(img)
print(url)
display(img)
for i in range(10):
img_array = np.asarray(img)
img_array_noise = add_noise(img_array)
# Flatten to a vector and shift/scale pixel values from [0, 255] to [-1, 0).
img_array = img_array.flatten()
img_array = img_array.astype(np.float32)
img_array = (img_array-256)/256
img_array_noise = img_array_noise.flatten()
img_array_noise = img_array_noise.astype(np.float32)
img_array_noise = (img_array_noise-256)/256
x.append(img_array_noise)
y.append(img_array)
# Stack the per-image vectors into (n_samples, 256*256*3) matrices.
x = np.array(x)
y = np.array(y)
print(x.shape)
print(y.shape)
https://www.planetware.com/photos-large/USCA/california-san-francisco-golden-gate-bridge.jpg
https://upload.wikimedia.org/wikipedia/commons/8/87/Golden_Gate_Bridge_Aerial.jpg
https://www.travelbook.de/data/uploads/2017/09/gettyimages-585577624_1504953116-1040x690.jpg
https://avatars.mds.yandex.net/get-pdb/27625/ea43da96-acce-4910-860b-27f8b3612dd9/orig
https://www.visittheusa.com/sites/default/files/styles/hero_m_1300x700/public/images/hero_media_image/2017-05/23b0b0b9caaa07ee409b693da9bf9003.jpeg?itok=QUmOkiy_
(50, 196608) (50, 196608)
%matplotlib inline
from PIL import Image, ImageFile
import requests
from io import BytesIO
from sklearn import metrics
import numpy as np
import pandas as pd
import tensorflow as tf
from IPython.display import display, HTML
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation
from tensorflow.keras.callbacks import EarlyStopping
# Fit regression DNN model.
print("Creating/Training neural network")
# Denoising auto-encoder: 100-50-100 bottleneck; the output layer is as wide
# as the (flattened) input so the network reconstructs the clean image.
model = Sequential()
model.add(Dense(100, input_dim=x.shape[1], activation='relu'))
model.add(Dense(50, activation='relu'))
model.add(Dense(100, activation='relu'))
model.add(Dense(x.shape[1])) # Multiple output neurons
model.compile(loss='mean_squared_error', optimizer='adam')
# x is the noisy data, y the clean targets.
model.fit(x,y,verbose=1,epochs=200)
print("Neural network trained")
Creating/Training neural network Epoch 1/125 2/2 [==============================] - 0s 86ms/step - loss: 0.2939 Epoch 2/125 2/2 [==============================] - 0s 78ms/step - loss: 0.1704 Epoch 3/125 2/2 [==============================] - 0s 76ms/step - loss: 0.0783 Epoch 4/125 2/2 [==============================] - 0s 83ms/step - loss: 0.0617 Epoch 5/125 2/2 [==============================] - 0s 75ms/step - loss: 0.0484 Epoch 6/125 2/2 [==============================] - 0s 83ms/step - loss: 0.0456 Epoch 7/125 2/2 [==============================] - 0s 78ms/step - loss: 0.0418 Epoch 8/125 2/2 [==============================] - 0s 75ms/step - loss: 0.0395 Epoch 9/125 2/2 [==============================] - 0s 82ms/step - loss: 0.0386 Epoch 10/125 2/2 [==============================] - 0s 104ms/step - loss: 0.0374 Epoch 11/125 2/2 [==============================] - 0s 96ms/step - loss: 0.0372 Epoch 12/125 2/2 [==============================] - 0s 92ms/step - loss: 0.0363 Epoch 13/125 2/2 [==============================] - 0s 79ms/step - loss: 0.0358 Epoch 14/125 2/2 [==============================] - 0s 79ms/step - loss: 0.0352 Epoch 15/125 2/2 [==============================] - 0s 88ms/step - loss: 0.0345 Epoch 16/125 2/2 [==============================] - 0s 96ms/step - loss: 0.0332 Epoch 17/125 2/2 [==============================] - 0s 83ms/step - loss: 0.0322 Epoch 18/125 2/2 [==============================] - 0s 73ms/step - loss: 0.0306 Epoch 19/125 2/2 [==============================] - 0s 94ms/step - loss: 0.0288 Epoch 20/125 2/2 [==============================] - 0s 89ms/step - loss: 0.0267 Epoch 21/125 2/2 [==============================] - 0s 87ms/step - loss: 0.0237 Epoch 22/125 2/2 [==============================] - 0s 83ms/step - loss: 0.0208 Epoch 23/125 2/2 [==============================] - 0s 88ms/step - loss: 0.0181 Epoch 24/125 2/2 [==============================] - 0s 83ms/step - loss: 0.0174 Epoch 25/125 2/2 [==============================] - 0s 
97ms/step - loss: 0.0153 Epoch 26/125 2/2 [==============================] - 0s 87ms/step - loss: 0.0200 Epoch 27/125 2/2 [==============================] - 0s 82ms/step - loss: 0.0290 Epoch 28/125 2/2 [==============================] - 0s 81ms/step - loss: 0.0299 Epoch 29/125 2/2 [==============================] - 0s 89ms/step - loss: 0.0295 Epoch 30/125 2/2 [==============================] - 0s 94ms/step - loss: 0.0369 Epoch 31/125 2/2 [==============================] - 0s 91ms/step - loss: 0.0167 Epoch 32/125 2/2 [==============================] - 0s 77ms/step - loss: 0.0280 Epoch 33/125 2/2 [==============================] - 0s 90ms/step - loss: 0.0114 Epoch 34/125 2/2 [==============================] - 0s 77ms/step - loss: 0.0184 Epoch 35/125 2/2 [==============================] - 0s 79ms/step - loss: 0.0144 Epoch 36/125 2/2 [==============================] - 0s 86ms/step - loss: 0.0088 Epoch 37/125 2/2 [==============================] - 0s 80ms/step - loss: 0.0096 Epoch 38/125 2/2 [==============================] - 0s 113ms/step - loss: 0.0057 Epoch 39/125 2/2 [==============================] - 0s 79ms/step - loss: 0.0053 Epoch 40/125 2/2 [==============================] - 0s 93ms/step - loss: 0.0048 Epoch 41/125 2/2 [==============================] - 0s 81ms/step - loss: 0.0022 Epoch 42/125 2/2 [==============================] - 0s 87ms/step - loss: 0.0026 Epoch 43/125 2/2 [==============================] - 0s 83ms/step - loss: 0.0022 Epoch 44/125 2/2 [==============================] - 0s 84ms/step - loss: 0.0018 Epoch 45/125 2/2 [==============================] - 0s 80ms/step - loss: 0.0017 Epoch 46/125 2/2 [==============================] - 0s 79ms/step - loss: 9.7387e-04 Epoch 47/125 2/2 [==============================] - 0s 76ms/step - loss: 0.0011 Epoch 48/125 2/2 [==============================] - 0s 81ms/step - loss: 8.2367e-04 Epoch 49/125 2/2 [==============================] - 0s 80ms/step - loss: 7.7536e-04 Epoch 50/125 2/2 
[==============================] - 0s 77ms/step - loss: 6.9606e-04 Epoch 51/125 2/2 [==============================] - 0s 86ms/step - loss: 5.9715e-04 Epoch 52/125 2/2 [==============================] - 0s 78ms/step - loss: 4.6476e-04 Epoch 53/125 2/2 [==============================] - 0s 76ms/step - loss: 3.4166e-04 Epoch 54/125 2/2 [==============================] - 0s 80ms/step - loss: 3.7227e-04 Epoch 55/125 2/2 [==============================] - 0s 78ms/step - loss: 2.5825e-04 Epoch 56/125 2/2 [==============================] - 0s 82ms/step - loss: 2.1100e-04 Epoch 57/125 2/2 [==============================] - 0s 75ms/step - loss: 2.3268e-04 Epoch 58/125 2/2 [==============================] - 0s 75ms/step - loss: 1.7493e-04 Epoch 59/125 2/2 [==============================] - 0s 73ms/step - loss: 1.7653e-04 Epoch 60/125 2/2 [==============================] - 0s 73ms/step - loss: 1.3402e-04 Epoch 61/125 2/2 [==============================] - 0s 97ms/step - loss: 8.3260e-05 Epoch 62/125 2/2 [==============================] - 0s 91ms/step - loss: 1.3406e-04 Epoch 63/125 2/2 [==============================] - 0s 76ms/step - loss: 1.0137e-04 Epoch 64/125 2/2 [==============================] - 0s 77ms/step - loss: 8.1433e-05 Epoch 65/125 2/2 [==============================] - 0s 78ms/step - loss: 7.2254e-05 Epoch 66/125 2/2 [==============================] - 0s 68ms/step - loss: 6.1200e-05 Epoch 67/125 2/2 [==============================] - 0s 79ms/step - loss: 5.6477e-05 Epoch 68/125 2/2 [==============================] - 0s 84ms/step - loss: 3.8884e-05 Epoch 69/125 2/2 [==============================] - 0s 76ms/step - loss: 3.8048e-05 Epoch 70/125 2/2 [==============================] - 0s 85ms/step - loss: 2.8822e-05 Epoch 71/125 2/2 [==============================] - 0s 86ms/step - loss: 2.9124e-05 Epoch 72/125 2/2 [==============================] - 0s 79ms/step - loss: 3.3031e-05 Epoch 73/125 2/2 [==============================] - 0s 91ms/step - loss: 3.4229e-05 
Epoch 74/125 2/2 [==============================] - 0s 91ms/step - loss: 2.8674e-05 Epoch 75/125 2/2 [==============================] - 0s 77ms/step - loss: 2.4229e-05 Epoch 76/125 2/2 [==============================] - 0s 79ms/step - loss: 2.3097e-05 Epoch 77/125 2/2 [==============================] - 0s 72ms/step - loss: 1.7612e-05 Epoch 78/125 2/2 [==============================] - 0s 81ms/step - loss: 1.6314e-05 Epoch 79/125 2/2 [==============================] - 0s 84ms/step - loss: 1.3775e-05 Epoch 80/125 2/2 [==============================] - 0s 82ms/step - loss: 1.6307e-05 Epoch 81/125 2/2 [==============================] - 0s 81ms/step - loss: 1.3400e-05 Epoch 82/125 2/2 [==============================] - 0s 99ms/step - loss: 1.2909e-05 Epoch 83/125 2/2 [==============================] - 0s 82ms/step - loss: 1.1757e-05 Epoch 84/125 2/2 [==============================] - 0s 76ms/step - loss: 1.2339e-05 Epoch 85/125 2/2 [==============================] - 0s 81ms/step - loss: 1.2417e-05 Epoch 86/125 2/2 [==============================] - 0s 75ms/step - loss: 1.2615e-05 Epoch 87/125 2/2 [==============================] - 0s 86ms/step - loss: 1.4040e-05 Epoch 88/125 2/2 [==============================] - 0s 76ms/step - loss: 1.4056e-05 Epoch 89/125 2/2 [==============================] - 0s 72ms/step - loss: 1.3691e-05 Epoch 90/125 2/2 [==============================] - 0s 72ms/step - loss: 1.4139e-05 Epoch 91/125 2/2 [==============================] - 0s 76ms/step - loss: 1.3817e-05 Epoch 92/125 2/2 [==============================] - 0s 80ms/step - loss: 1.2924e-05 Epoch 93/125 2/2 [==============================] - 0s 78ms/step - loss: 1.1549e-05 Epoch 94/125 2/2 [==============================] - 0s 84ms/step - loss: 1.0277e-05 Epoch 95/125 2/2 [==============================] - 0s 78ms/step - loss: 9.1729e-06 Epoch 96/125 2/2 [==============================] - 0s 71ms/step - loss: 8.2833e-06 Epoch 97/125 2/2 [==============================] - 0s 90ms/step - 
loss: 7.7101e-06 Epoch 98/125 2/2 [==============================] - 0s 80ms/step - loss: 6.9499e-06 Epoch 99/125 2/2 [==============================] - 0s 84ms/step - loss: 6.2782e-06 Epoch 100/125 2/2 [==============================] - 0s 78ms/step - loss: 5.9469e-06 Epoch 101/125 2/2 [==============================] - 0s 89ms/step - loss: 5.9291e-06 Epoch 102/125 2/2 [==============================] - 0s 79ms/step - loss: 5.9461e-06 Epoch 103/125 2/2 [==============================] - 0s 76ms/step - loss: 5.7599e-06 Epoch 104/125 2/2 [==============================] - 0s 72ms/step - loss: 4.9150e-06 Epoch 105/125 2/2 [==============================] - 0s 80ms/step - loss: 5.0589e-06 Epoch 106/125 2/2 [==============================] - 0s 79ms/step - loss: 5.0531e-06 Epoch 107/125 2/2 [==============================] - 0s 81ms/step - loss: 5.0897e-06 Epoch 108/125 2/2 [==============================] - 0s 74ms/step - loss: 4.9970e-06 Epoch 109/125 2/2 [==============================] - 0s 73ms/step - loss: 4.6941e-06 Epoch 110/125 2/2 [==============================] - 0s 74ms/step - loss: 4.6272e-06 Epoch 111/125 2/2 [==============================] - 0s 71ms/step - loss: 4.2196e-06 Epoch 112/125 2/2 [==============================] - 0s 100ms/step - loss: 4.2716e-06 Epoch 113/125 2/2 [==============================] - 0s 93ms/step - loss: 4.1800e-06 Epoch 114/125 2/2 [==============================] - 0s 80ms/step - loss: 3.8876e-06 Epoch 115/125 2/2 [==============================] - 0s 77ms/step - loss: 3.7825e-06 Epoch 116/125 2/2 [==============================] - 0s 84ms/step - loss: 3.6855e-06 Epoch 117/125 2/2 [==============================] - 0s 75ms/step - loss: 3.6106e-06 Epoch 118/125 2/2 [==============================] - 0s 75ms/step - loss: 3.5003e-06 Epoch 119/125 2/2 [==============================] - 0s 88ms/step - loss: 3.4930e-06 Epoch 120/125 2/2 [==============================] - 0s 80ms/step - loss: 3.3520e-06 Epoch 121/125 2/2 
[==============================] - 0s 81ms/step - loss: 3.2340e-06 Epoch 122/125 2/2 [==============================] - 0s 73ms/step - loss: 3.2168e-06 Epoch 123/125 2/2 [==============================] - 0s 80ms/step - loss: 3.1785e-06 Epoch 124/125 2/2 [==============================] - 0s 82ms/step - loss: 3.0020e-06 Epoch 125/125 2/2 [==============================] - 0s 78ms/step - loss: 2.9582e-06 Neural network trained
# Denoising demo: corrupt ten randomly chosen images with noise, run each
# through the trained autoencoder, and display the before/after pair.
for trial in range(10):
    print("*** Trial {}".format(trial + 1))

    # Pick a random image from the previously loaded set.
    idx = np.random.randint(len(loaded_images))
    image = loaded_images[idx]
    pixels = np.asarray(image)
    width, height = image.size  # PIL size is (width, height)

    # Corrupt the pixel array and show the noisy version.
    noisy = add_noise(pixels)
    print("Before auto encoder (with noise):")
    display(Image.fromarray(noisy.astype(np.uint8), 'RGB'))

    # Flatten and rescale into the range the network expects, then predict.
    # NOTE(review): the (x - 256) / 256 scaling mirrors the inverse transform
    # below — presumably the same normalization used at training time; verify
    # against the training cell upstream.
    features = (noisy.flatten().astype(np.float32) - 256) / 256
    reconstruction = model.predict(np.array([features]))[0]

    # Undo the scaling, restore the (height, width, channel) layout,
    # and show the denoised result.
    restored = (reconstruction.reshape(height, width, 3) * 256) + 256
    print("After auto encoder noise removal")
    display(Image.fromarray(restored.astype(np.uint8), 'RGB'))
*** Trial 1 Before auto encoder (with noise):
After auto encoder noise removal
*** Trial 2 Before auto encoder (with noise):
After auto encoder noise removal
*** Trial 3 Before auto encoder (with noise):
After auto encoder noise removal
*** Trial 4 Before auto encoder (with noise):
After auto encoder noise removal
*** Trial 5 Before auto encoder (with noise):
After auto encoder noise removal
*** Trial 6 Before auto encoder (with noise):
After auto encoder noise removal
*** Trial 7 Before auto encoder (with noise):
After auto encoder noise removal
*** Trial 8 Before auto encoder (with noise):
After auto encoder noise removal
*** Trial 9 Before auto encoder (with noise):
After auto encoder noise removal
*** Trial 10 Before auto encoder (with noise):
After auto encoder noise removal